return self->vk_buffer;
}
+/* Returns the size, in bytes, that the buffer was allocated with. */
+gsize
+gsk_vulkan_buffer_get_size (GskVulkanBuffer *self)
+{
+  return self->size;
+}
+
guchar *
gsk_vulkan_buffer_map (GskVulkanBuffer *self)
{
void gsk_vulkan_buffer_free (GskVulkanBuffer *buffer);
VkBuffer gsk_vulkan_buffer_get_buffer (GskVulkanBuffer *self);
+gsize gsk_vulkan_buffer_get_size (GskVulkanBuffer *self);
guchar * gsk_vulkan_buffer_map (GskVulkanBuffer *self);
void gsk_vulkan_buffer_unmap (GskVulkanBuffer *self);
gsk_vulkan_offscreen_op_count_vertex_data (GskVulkanOp *op,
gsize n_bytes)
{
- return n_bytes;
+ GskVulkanOffscreenOp *self = (GskVulkanOffscreenOp *) op;
+
+ return gsk_vulkan_render_pass_count_vertex_data (self->render_pass, n_bytes);
}
static void
GskVulkanRender *render,
guchar *data)
{
+ GskVulkanOffscreenOp *self = (GskVulkanOffscreenOp *) op;
+
+ gsk_vulkan_render_pass_collect_vertex_data (self->render_pass, render, data);
}
static void
#include "gdk/gdkvulkancontextprivate.h"
#define DESCRIPTOR_POOL_MAXITEMS 50000
+/* Granularity for growing the per-frame vertex buffer; parenthesized so the
+ * macro stays a single term in any expression (e.g. `x % VERTEX_BUFFER_SIZE_STEP`). */
+#define VERTEX_BUFFER_SIZE_STEP (128 * 1024) /* 128kB */
#define GDK_ARRAY_NAME gsk_descriptor_image_infos
#define GDK_ARRAY_TYPE_NAME GskDescriptorImageInfos
GskVulkanImage *target;
+ GskVulkanBuffer *vertex_buffer;
VkSampler samplers[3];
GskVulkanBuffer *storage_buffer;
guchar *storage_buffer_memory;
0, NULL);
}
+/* Gathers all vertex data for the frame's render pass into a single
+ * reusable vertex buffer on @self, growing the cached buffer in
+ * VERTEX_BUFFER_SIZE_STEP increments when it is too small. */
+static void
+gsk_vulkan_render_collect_vertex_buffer (GskVulkanRender *self)
+{
+  gsize n_bytes;
+  guchar *data;
+
+  /* First pass: ops accumulate the total byte count, starting at offset 0. */
+  n_bytes = gsk_vulkan_render_pass_count_vertex_data (self->render_pass, 0);
+  if (n_bytes == 0)
+    return;
+
+  /* Cached buffer is only ever grown, never shrunk. */
+  if (self->vertex_buffer && gsk_vulkan_buffer_get_size (self->vertex_buffer) < n_bytes)
+    g_clear_pointer (&self->vertex_buffer, gsk_vulkan_buffer_free);
+
+  /* NOTE(review): round_up() is removed from gskvulkanrenderpass.c by this
+   * same patch — confirm an equivalent definition is visible here. */
+  if (self->vertex_buffer == NULL)
+    self->vertex_buffer = gsk_vulkan_buffer_new (self->vulkan, round_up (n_bytes, VERTEX_BUFFER_SIZE_STEP));
+
+  /* Second pass: map the buffer and let the ops write their vertices. */
+  data = gsk_vulkan_buffer_map (self->vertex_buffer);
+  gsk_vulkan_render_pass_collect_vertex_data (self->render_pass, self, data);
+  gsk_vulkan_buffer_unmap (self->vertex_buffer);
+}
+
void
gsk_vulkan_render_draw_pass (GskVulkanRender *self,
GskVulkanRenderPass *pass,
command_buffer = gsk_vulkan_command_pool_get_buffer (self->command_pool);
+ if (self->vertex_buffer)
+ vkCmdBindVertexBuffers (command_buffer,
+ 0,
+ 1,
+ (VkBuffer[1]) {
+ gsk_vulkan_buffer_get_buffer (self->vertex_buffer)
+ },
+ (VkDeviceSize[1]) { 0 });
+
gsk_vulkan_render_pass_draw (pass, self, self->pipeline_layout, command_buffer);
gsk_vulkan_command_pool_submit_buffer (self->command_pool,
gsk_vulkan_render_prepare_descriptor_sets (self);
+ gsk_vulkan_render_collect_vertex_buffer (self);
+
gsk_vulkan_render_draw_pass (self,
self->render_pass,
self->fence);
gsk_vulkan_render_cleanup (self);
+ g_clear_pointer (&self->vertex_buffer, gsk_vulkan_buffer_free);
+
device = gdk_vulkan_context_get_device (self->vulkan);
g_hash_table_iter_init (&iter, self->pipeline_cache);
VkRenderPass render_pass;
VkFramebuffer framebuffer;
- GskVulkanBuffer *vertex_data;
};
struct _GskVulkanParseState
NULL,
&self->framebuffer);
- self->vertex_data = NULL;
-
#ifdef G_ENABLE_DEBUG
if (fallback_pixels_quark == 0)
{
vkDestroyFramebuffer (device, self->framebuffer, NULL);
vkDestroyRenderPass (device, self->render_pass, NULL);
- if (self->vertex_data)
- gsk_vulkan_buffer_free (self->vertex_data);
-
g_free (self);
}
}
}
-static inline gsize
-round_up (gsize number, gsize divisor)
-{
- return (number + divisor - 1) / divisor * divisor;
-}
-
gpointer
gsk_vulkan_render_pass_alloc_op (GskVulkanRenderPass *self,
gsize size)
}
}
-static gsize
-gsk_vulkan_render_pass_count_vertex_data (GskVulkanRenderPass *self)
+gsize
+gsk_vulkan_render_pass_count_vertex_data (GskVulkanRenderPass *self,
+ gsize n_bytes)
{
GskVulkanOp *op;
- gsize n_bytes;
- n_bytes = 0;
for (op = gsk_vulkan_render_pass_get_first_op (self); op; op = op->next)
{
n_bytes = gsk_vulkan_op_count_vertex_data (op, n_bytes);
return n_bytes;
}
-static void
+void
gsk_vulkan_render_pass_collect_vertex_data (GskVulkanRenderPass *self,
GskVulkanRender *render,
guchar *data)
}
}
-static GskVulkanBuffer *
-gsk_vulkan_render_pass_get_vertex_data (GskVulkanRenderPass *self,
- GskVulkanRender *render)
-{
- if (self->vertex_data == NULL)
- {
- gsize n_bytes;
- guchar *data;
-
- n_bytes = gsk_vulkan_render_pass_count_vertex_data (self);
- if (n_bytes == 0)
- return NULL;
-
- self->vertex_data = gsk_vulkan_buffer_new (self->vulkan, n_bytes);
- data = gsk_vulkan_buffer_map (self->vertex_data);
- gsk_vulkan_render_pass_collect_vertex_data (self, render, data);
- gsk_vulkan_buffer_unmap (self->vertex_data);
- }
-
- return self->vertex_data;
-}
-
void
gsk_vulkan_render_pass_reserve_descriptor_sets (GskVulkanRenderPass *self,
GskVulkanRender *render)
VkPipeline current_pipeline = VK_NULL_HANDLE;
const GskVulkanOpClass *current_pipeline_class = NULL;
const char *current_pipeline_clip_type = NULL;
- GskVulkanBuffer *vertex_buffer;
GskVulkanOp *op;
- vertex_buffer = gsk_vulkan_render_pass_get_vertex_data (self, render);
-
- if (vertex_buffer)
- vkCmdBindVertexBuffers (command_buffer,
- 0,
- 1,
- (VkBuffer[1]) {
- gsk_vulkan_buffer_get_buffer (vertex_buffer)
- },
- (VkDeviceSize[1]) { 0 });
-
for (op = gsk_vulkan_render_pass_get_first_op (self); op; op = op->next)
{
if (op->op_class->shader_name &&
GskVulkanUploader *uploader);
void gsk_vulkan_render_pass_reserve_descriptor_sets (GskVulkanRenderPass *self,
GskVulkanRender *render);
+gsize gsk_vulkan_render_pass_count_vertex_data (GskVulkanRenderPass *self,
+ gsize n_bytes);
+void gsk_vulkan_render_pass_collect_vertex_data (GskVulkanRenderPass *self,
+ GskVulkanRender *render,
+ guchar *data);
void gsk_vulkan_render_pass_draw (GskVulkanRenderPass *self,
GskVulkanRender *render,
VkPipelineLayout pipeline_layout,